library(tidyverse)
library(ggdist)
library(ggside)
library(easystats)
library(patchwork)
# Load the raw datasets. For both illusion tasks, "Incongruent" is placed
# first among the levels of Illusion_Effect (reference level for later
# contrasts), and Block is treated as a discrete factor rather than a number.
illusion1 <- read.csv("../data/raw_illusion1.csv")
illusion1 <- illusion1 |>
  mutate(
    Illusion_Effect = fct_relevel(as.factor(Illusion_Effect), "Incongruent", "Congruent"),
    Block = as.factor(Block)
  )

illusion2 <- read.csv("../data/raw_illusion2.csv")
illusion2 <- illusion2 |>
  mutate(
    Illusion_Effect = fct_relevel(as.factor(Illusion_Effect), "Incongruent", "Congruent"),
    Block = as.factor(Block)
  )

# The perceptual task has no congruency column; only Block needs recoding.
perceptual <- read.csv("../data/raw_perceptual.csv")
perceptual <- perceptual |>
  mutate(Block = as.factor(Block))

# Questionnaire data: impose an explicit ordering on the education levels.
sub <- read.csv("../data/raw_questionnaires.csv")
sub <- sub |>
  mutate(
    Education = fct_relevel(Education, "Prefer not to say", "Other", "High school", "Bachelor", "Master", "Doctorate")
  )
# For prolific:
# bayestestR::point_estimate(sub$Duration_Session1)
# plot(bayestestR::estimate_density(sub$Duration_Session1))
# Dear participant, thank you for participating in our study. Unfortunately, our system detected multiple issues in your data (such as implausibly short responses - in particular in the 2nd part of the study, random-like patterns of answers, or the same response to different scales - as some were designed to trigger varied answers), which makes it unusable. We understand that you might have been in a hurry or had some other issues, and so we kindly ask you to return your participation; we hope to open up more slots in the future, should you be interested in participating again.
# Dear participant, thank you for participating in our study. Unfortunately, our system detected multiple issues in your data (such as implausibly short responses - in particular in the 2nd part of the study, random-like patterns of answers, or the same response to different scales - as some were designed to trigger varied answers), which makes it unusable for us. We understand that you might have been in a hurry or had some other issues; we hope to open up more slots in the future, should you be interested in participating again.
# Participants flagged for exclusion after inspection of error rates and RT
# distributions (see the table and figure below).
# "*_block2" vectors: only the participant's second block is discarded
# (shown in orange in the table); the others lose the whole task (red).
outliers_perceptual <- c("S003", "S008")
outliers_perceptual_block2 <- c("S226", "S350")
outliers_illusion1 <- c("S008", "S278", "S389")
outliers_illusion1_block2 <- c("S153", "S226", "S350")
outliers_illusion2 <- c("S003")
We removed 3, 2, and 1 participants for the illusion task - session 1, perceptual task, and illusion task - session 2 respectively, upon inspection of the average error rate (when close to 50%, suggesting random answers) and/or when the reaction time distribution was implausibly fast.
# Pool the three tasks into one long data frame. Trials slower than 10 s are
# dropped outright (implausible responses). Participant levels are reversed
# (so facets plot top-to-bottom) and Task is ordered chronologically.
data <- rbind(illusion1, illusion2, perceptual)
data <- data |>
  filter(RT < 10) |>
  mutate(
    Participant = fct_rev(Participant),
    Task = fct_relevel(Task, "Illusion_Session1", "Perceptual", "Illusion_Session2")
  )
# Per-participant, per-task summary (error rate and mean RT), reshaped so
# each participant is one row with a pair of columns per task, then ordered
# by the session-1 illusion error rate (worst performers first).
summary_long <- data |>
  group_by(Participant, Task) |>
  summarize(
    Error = sum(Error) / n(),
    RT = mean(RT)
  ) |>
  ungroup() |>
  arrange(desc(Error))

table <- summary_long |>
  tidyr::pivot_wider(names_from = "Task", values_from = c("Error", "RT"), names_vary = "slowest") |>
  # Put session-2 columns last and session-1 columns right after Participant.
  datawizard::data_relocate(ends_with("Session2"), after = -1) |>
  datawizard::data_relocate(ends_with("Session1"), after = 1) |>
  arrange(desc(Error_Illusion_Session1))
# Render the summary table: prepend a row of column averages, then colour
# rows of excluded participants (red = whole task excluded, orange = only
# block 2 excluded). The "+ 1" offsets account for the prepended Average row.
avg_row <- data.frame(Participant = c("Average"), t(sapply(table[2:ncol(table)], mean, na.rm = TRUE)))
rows_red <- which(table$Participant %in% c(outliers_perceptual, outliers_illusion1, outliers_illusion2)) + 1
rows_orange <- which(table$Participant %in% c(outliers_illusion1_block2, outliers_perceptual_block2)) + 1
rbind(avg_row, table) |>
  knitr::kable() |>
  kableExtra::row_spec(1, italic = TRUE, background = "grey", color = "white") |>
  kableExtra::row_spec(rows_red, background = "#EF9A9A") |>
  kableExtra::row_spec(rows_orange, background = "orange") |>
  kableExtra::kable_styling(full_width = TRUE) |>
  kableExtra::scroll_box(width = "100%", height = "500px")
| Participant | Error_Illusion_Session1 | RT_Illusion_Session1 | Error_Perceptual | RT_Perceptual | RT_Illusion_Session2 | Error_Illusion_Session2 |
|---|---|---|---|---|---|---|
| Average | 0.188 | 0.768 | 0.066 | 0.671 | 0.731 | 0.265 |
| S389 | 0.487 | 0.185 | 0.156 | 0.647 | ||
| S008 | 0.417 | 0.610 | 0.500 | 0.347 | ||
| S110 | 0.391 | 1.519 | 0.323 | 2.553 | ||
| S278 | 0.391 | 0.349 | 0.844 | 0.494 | ||
| S124 | 0.383 | 0.708 | 0.167 | 0.606 | ||
| S319 | 0.372 | 0.787 | 0.042 | 0.656 | ||
| S153 | 0.370 | 0.575 | 0.109 | 0.699 | ||
| S198 | 0.357 | 0.755 | 0.312 | 0.683 | ||
| S226 | 0.354 | 0.365 | 0.266 | 0.366 | ||
| S002 | 0.346 | 0.713 | 0.115 | 0.872 | 0.945 | 0.396 |
| S023 | 0.346 | 0.631 | 0.125 | 0.619 | ||
| S408 | 0.341 | 0.597 | 0.146 | 0.594 | ||
| S362 | 0.339 | 0.764 | 0.083 | 0.751 | ||
| S232 | 0.336 | 1.223 | 0.219 | 0.802 | ||
| S299 | 0.323 | 0.712 | 0.214 | 0.584 | ||
| S437 | 0.318 | 1.213 | 0.068 | 0.936 | ||
| S070 | 0.314 | 1.118 | 0.149 | 1.014 | ||
| S422 | 0.312 | 0.511 | 0.203 | 0.480 | ||
| S117 | 0.305 | 0.490 | 0.182 | 0.412 | ||
| S307 | 0.297 | 0.573 | 0.146 | 0.681 | ||
| S146 | 0.297 | 0.619 | 0.042 | 0.709 | ||
| S151 | 0.292 | 0.570 | 0.177 | 0.542 | ||
| S328 | 0.289 | 0.543 | 0.130 | 0.611 | ||
| S144 | 0.289 | 0.737 | 0.146 | 0.691 | ||
| S080 | 0.286 | 0.477 | 0.120 | 0.532 | ||
| S479 | 0.284 | 0.459 | 0.047 | 0.523 | ||
| S266 | 0.284 | 0.601 | 0.104 | 0.717 | ||
| S132 | 0.284 | 0.469 | 0.135 | 0.446 | ||
| S178 | 0.281 | 0.542 | 0.146 | 0.507 | ||
| S350 | 0.280 | 0.733 | 0.351 | 0.314 | ||
| S246 | 0.279 | 0.445 | 0.073 | 0.412 | ||
| S135 | 0.279 | 0.516 | 0.125 | 0.504 | ||
| S087 | 0.279 | 0.576 | 0.151 | 0.589 | ||
| S359 | 0.276 | 0.669 | 0.078 | 0.598 | ||
| S341 | 0.271 | 0.563 | 0.042 | 0.585 | ||
| S152 | 0.266 | 0.947 | 0.078 | 0.549 | ||
| S290 | 0.266 | 0.602 | 0.104 | 0.564 | ||
| S224 | 0.263 | 0.683 | 0.214 | 0.494 | ||
| S174 | 0.263 | 0.617 | 0.062 | 0.730 | ||
| S167 | 0.263 | 0.665 | 0.193 | 0.591 | ||
| S021 | 0.263 | 0.544 | 0.250 | 0.703 | ||
| S384 | 0.260 | 0.707 | 0.120 | 0.647 | ||
| S333 | 0.260 | 0.831 | 0.052 | 0.883 | ||
| S059 | 0.260 | 0.542 | 0.021 | 0.488 | ||
| S475 | 0.258 | 0.590 | 0.094 | 0.531 | ||
| S348 | 0.258 | 0.658 | 0.109 | 0.575 | ||
| S251 | 0.258 | 0.632 | 0.052 | 1.205 | ||
| S073 | 0.258 | 0.665 | 0.104 | 0.596 | ||
| S006 | 0.258 | 0.529 | 0.089 | 0.620 | 0.723 | 0.216 |
| S195 | 0.255 | 0.637 | 0.068 | 0.564 | ||
| S154 | 0.254 | 0.615 | 0.141 | 0.528 | ||
| S334 | 0.253 | 0.932 | 0.052 | 0.838 | ||
| S305 | 0.253 | 0.613 | 0.188 | 0.574 | ||
| S301 | 0.253 | 0.711 | 0.036 | 0.580 | ||
| S173 | 0.253 | 0.754 | 0.062 | 0.760 | ||
| S140 | 0.253 | 0.591 | 0.068 | 0.636 | ||
| S136 | 0.253 | 0.549 | 0.078 | 0.528 | ||
| S123 | 0.253 | 0.815 | 0.083 | 0.665 | ||
| S367 | 0.250 | 0.551 | 0.099 | 0.584 | ||
| S270 | 0.250 | 0.798 | 0.026 | 0.646 | ||
| S166 | 0.250 | 0.520 | 0.120 | 0.472 | ||
| S129 | 0.250 | 0.674 | 0.062 | 0.572 | ||
| S094 | 0.250 | 0.602 | 0.089 | 0.514 | ||
| S337 | 0.247 | 0.760 | 0.151 | 0.537 | ||
| S162 | 0.247 | 0.509 | 0.016 | 0.595 | ||
| S133 | 0.247 | 0.493 | 0.099 | 0.583 | ||
| S112 | 0.247 | 0.490 | 0.073 | 0.492 | ||
| S419 | 0.245 | 0.606 | 0.115 | 0.592 | ||
| S306 | 0.245 | 0.557 | 0.141 | 0.419 | ||
| S229 | 0.245 | 0.759 | 0.036 | 0.642 | ||
| S053 | 0.245 | 0.511 | 0.151 | 0.475 | ||
| S281 | 0.242 | 0.544 | 0.109 | 0.465 | ||
| S048 | 0.242 | 0.502 | 0.052 | 0.502 | ||
| S330 | 0.240 | 0.603 | 0.078 | 0.462 | ||
| S203 | 0.240 | 0.825 | 0.047 | 0.633 | ||
| S101 | 0.240 | 0.427 | 0.177 | 0.406 | ||
| S347 | 0.237 | 0.643 | 0.083 | 0.543 | ||
| S329 | 0.237 | 1.084 | 0.036 | 0.918 | ||
| S316 | 0.237 | 0.543 | 0.104 | 0.594 | ||
| S156 | 0.237 | 0.665 | 0.089 | 0.524 | ||
| S025 | 0.237 | 0.713 | 0.073 | 0.598 | ||
| S279 | 0.234 | 0.597 | 0.099 | 0.554 | ||
| S211 | 0.234 | 0.561 | 0.057 | 0.590 | ||
| S007 | 0.234 | 0.894 | 0.089 | 0.770 | ||
| S177 | 0.233 | 0.701 | 0.130 | 0.795 | ||
| S484 | 0.232 | 0.541 | 0.073 | 0.498 | ||
| S373 | 0.232 | 0.729 | 0.047 | 0.763 | ||
| S259 | 0.232 | 0.715 | 0.135 | 0.613 | ||
| S247 | 0.232 | 0.497 | 0.021 | 0.473 | ||
| S233 | 0.232 | 0.517 | 0.062 | 0.530 | ||
| S217 | 0.232 | 0.558 | 0.089 | 0.482 | ||
| S199 | 0.232 | 0.591 | 0.120 | 0.566 | ||
| S163 | 0.232 | 0.658 | 0.047 | 0.738 | ||
| S036 | 0.232 | 0.845 | 0.068 | 0.749 | ||
| S402 | 0.229 | 0.785 | 0.036 | 0.813 | ||
| S287 | 0.229 | 0.661 | 0.099 | 0.481 | ||
| S083 | 0.229 | 0.588 | 0.042 | 0.561 | ||
| S063 | 0.228 | 0.885 | 0.042 | 0.797 | ||
| S324 | 0.227 | 0.739 | 0.047 | 0.637 | ||
| S014 | 0.227 | 0.641 | 0.047 | 0.548 | ||
| S276 | 0.224 | 0.535 | 0.047 | 0.538 | ||
| S248 | 0.224 | 0.483 | 0.083 | 0.478 | ||
| S244 | 0.224 | 0.575 | 0.172 | 0.476 | ||
| S235 | 0.224 | 0.524 | 0.057 | 0.491 | ||
| S103 | 0.224 | 0.613 | 0.036 | 0.562 | ||
| S097 | 0.224 | 0.791 | 0.068 | 0.528 | ||
| S019 | 0.224 | 0.618 | 0.052 | 0.496 | ||
| S311 | 0.221 | 0.450 | 0.234 | 0.401 | ||
| S497 | 0.221 | 0.819 | 0.052 | 0.789 | ||
| S472 | 0.221 | 0.857 | 0.089 | 0.747 | ||
| S320 | 0.221 | 0.565 | 0.104 | 0.690 | ||
| S190 | 0.221 | 0.724 | 0.021 | 0.674 | ||
| S046 | 0.221 | 0.620 | 0.073 | 0.553 | ||
| S042 | 0.221 | 0.560 | 0.161 | 0.493 | ||
| S289 | 0.220 | 0.883 | 0.089 | 0.566 | ||
| S390 | 0.219 | 0.805 | 0.036 | 0.810 | ||
| S139 | 0.219 | 0.605 | 0.052 | 0.537 | ||
| S109 | 0.219 | 0.736 | 0.135 | 0.641 | ||
| S092 | 0.217 | 1.202 | 0.036 | 0.810 | ||
| S466 | 0.216 | 0.582 | 0.047 | 0.570 | ||
| S332 | 0.216 | 0.727 | 0.083 | 0.631 | ||
| S331 | 0.216 | 0.542 | 0.089 | 0.477 | ||
| S295 | 0.216 | 0.949 | 0.021 | 0.582 | ||
| S294 | 0.216 | 0.625 | 0.130 | 0.689 | ||
| S157 | 0.216 | 0.571 | 0.057 | 0.521 | ||
| S079 | 0.216 | 0.586 | 0.052 | 0.588 | ||
| S207 | 0.214 | 0.669 | 0.083 | 0.578 | ||
| S200 | 0.214 | 0.575 | 0.094 | 0.523 | ||
| S131 | 0.214 | 0.572 | 0.089 | 0.584 | ||
| S102 | 0.214 | 0.514 | 0.120 | 0.526 | ||
| S250 | 0.213 | 0.761 | 0.147 | 0.803 | ||
| S003 | 0.211 | 0.853 | 0.214 | 0.441 | 0.331 | 0.371 |
| S488 | 0.211 | 1.118 | 0.036 | 0.878 | ||
| S458 | 0.211 | 0.626 | 0.047 | 0.548 | ||
| S452 | 0.211 | 0.546 | 0.031 | 0.600 | ||
| S448 | 0.211 | 0.543 | 0.115 | 0.556 | ||
| S206 | 0.211 | 0.792 | 0.083 | 0.722 | ||
| S188 | 0.211 | 0.865 | 0.047 | 1.043 | ||
| S107 | 0.211 | 0.808 | 0.036 | 0.722 | ||
| S064 | 0.211 | 0.885 | 0.073 | 0.714 | ||
| S303 | 0.209 | 0.655 | 0.031 | 0.596 | ||
| S454 | 0.208 | 0.749 | 0.047 | 0.591 | ||
| S453 | 0.208 | 0.714 | 0.125 | 0.570 | ||
| S417 | 0.208 | 0.626 | 0.036 | 0.641 | ||
| S371 | 0.208 | 0.593 | 0.057 | 0.496 | ||
| S356 | 0.208 | 0.634 | 0.115 | 0.584 | ||
| S298 | 0.208 | 0.551 | 0.083 | 0.527 | ||
| S128 | 0.208 | 0.596 | 0.115 | 0.603 | ||
| S093 | 0.208 | 0.516 | 0.057 | 0.512 | ||
| S043 | 0.208 | 0.688 | 0.057 | 0.823 | ||
| S214 | 0.206 | 0.679 | 0.042 | 0.591 | ||
| S481 | 0.206 | 0.559 | 0.026 | 0.562 | ||
| S469 | 0.206 | 0.799 | 0.094 | 0.565 | ||
| S377 | 0.206 | 0.721 | 0.083 | 0.542 | ||
| S366 | 0.206 | 0.656 | 0.021 | 0.675 | ||
| S336 | 0.206 | 0.691 | 0.104 | 0.560 | ||
| S312 | 0.206 | 0.993 | 0.073 | 0.619 | ||
| S265 | 0.206 | 0.972 | 0.057 | 0.806 | ||
| S175 | 0.206 | 1.272 | 0.109 | 1.099 | ||
| S145 | 0.206 | 0.541 | 0.099 | 0.498 | ||
| S086 | 0.206 | 0.726 | 0.099 | 0.717 | ||
| S283 | 0.204 | 1.663 | 0.177 | 0.682 | ||
| S405 | 0.203 | 0.915 | 0.052 | 0.846 | ||
| S385 | 0.203 | 0.750 | 0.047 | 0.774 | ||
| S304 | 0.203 | 0.749 | 0.120 | 0.534 | ||
| S191 | 0.203 | 0.745 | 0.062 | 0.591 | ||
| S116 | 0.203 | 0.760 | 0.104 | 0.635 | ||
| S113 | 0.203 | 0.758 | 0.073 | 0.643 | ||
| S108 | 0.203 | 0.715 | 0.089 | 0.569 | ||
| S096 | 0.203 | 0.554 | 0.094 | 0.473 | ||
| S499 | 0.201 | 0.595 | 0.156 | 0.501 | ||
| S490 | 0.201 | 0.812 | 0.052 | 0.824 | ||
| S407 | 0.201 | 0.954 | 0.042 | 0.737 | ||
| S272 | 0.201 | 0.578 | 0.073 | 0.501 | ||
| S242 | 0.201 | 0.986 | 0.010 | 0.748 | ||
| S218 | 0.201 | 0.593 | 0.104 | 0.448 | ||
| S172 | 0.201 | 0.567 | 0.057 | 0.470 | ||
| S105 | 0.201 | 0.617 | 0.052 | 0.525 | ||
| S100 | 0.201 | 0.740 | 0.083 | 0.730 | ||
| S066 | 0.201 | 0.859 | 0.031 | 0.728 | ||
| S001 | 0.199 | 0.986 | 0.323 | 0.946 | ||
| S483 | 0.198 | 0.657 | 0.047 | 0.675 | ||
| S409 | 0.198 | 0.630 | 0.115 | 0.578 | ||
| S396 | 0.198 | 0.744 | 0.083 | 0.666 | ||
| S239 | 0.198 | 0.626 | 0.089 | 0.545 | ||
| S477 | 0.195 | 0.553 | 0.083 | 0.571 | ||
| S456 | 0.195 | 0.654 | 0.099 | 0.569 | ||
| S424 | 0.195 | 0.811 | 0.042 | 0.741 | ||
| S375 | 0.195 | 0.635 | 0.083 | 0.573 | ||
| S342 | 0.195 | 0.634 | 0.057 | 0.714 | ||
| S227 | 0.195 | 0.734 | 0.047 | 0.670 | ||
| S130 | 0.195 | 0.660 | 0.021 | 0.606 | ||
| S125 | 0.195 | 0.605 | 0.068 | 0.563 | ||
| S052 | 0.195 | 0.537 | 0.031 | 0.541 | ||
| S495 | 0.193 | 0.761 | 0.042 | 0.713 | ||
| S346 | 0.193 | 0.582 | 0.078 | 0.502 | ||
| S335 | 0.193 | 0.627 | 0.078 | 0.620 | ||
| S260 | 0.193 | 0.671 | 0.042 | 0.560 | ||
| S122 | 0.193 | 0.621 | 0.089 | 0.556 | ||
| S085 | 0.193 | 0.824 | 0.177 | 0.637 | ||
| S047 | 0.191 | 0.823 | 0.094 | 0.624 | ||
| S478 | 0.190 | 0.519 | 0.083 | 0.473 | ||
| S425 | 0.190 | 0.762 | 0.057 | 0.698 | ||
| S418 | 0.190 | 0.857 | 0.016 | 0.827 | ||
| S323 | 0.190 | 0.698 | 0.068 | 0.649 | ||
| S285 | 0.190 | 1.028 | 0.026 | 0.769 | ||
| S237 | 0.190 | 0.859 | 0.031 | 0.733 | ||
| S223 | 0.190 | 0.563 | 0.057 | 0.517 | ||
| S192 | 0.190 | 0.803 | 0.031 | 0.781 | ||
| S072 | 0.190 | 0.925 | 0.057 | 0.960 | ||
| S496 | 0.188 | 0.754 | 0.083 | 0.713 | ||
| S485 | 0.188 | 0.897 | 0.052 | 0.788 | ||
| S411 | 0.188 | 0.966 | 0.005 | 0.685 | ||
| S388 | 0.188 | 0.639 | 0.057 | 0.587 | ||
| S277 | 0.188 | 0.636 | 0.078 | 0.532 | ||
| S082 | 0.188 | 0.470 | 0.125 | 0.410 | ||
| S067 | 0.188 | 0.560 | 0.099 | 0.550 | ||
| S494 | 0.185 | 0.584 | 0.021 | 0.552 | ||
| S441 | 0.185 | 0.700 | 0.036 | 0.585 | ||
| S383 | 0.185 | 1.085 | 0.062 | 0.976 | ||
| S358 | 0.185 | 0.689 | 0.047 | 0.754 | ||
| S345 | 0.185 | 0.608 | 0.057 | 0.604 | ||
| S321 | 0.185 | 0.831 | 0.062 | 0.687 | ||
| S300 | 0.185 | 0.673 | 0.021 | 0.677 | ||
| S296 | 0.185 | 0.565 | 0.068 | 0.476 | ||
| S282 | 0.185 | 0.505 | 0.078 | 0.460 | ||
| S267 | 0.185 | 0.530 | 0.042 | 0.418 | ||
| S149 | 0.185 | 0.638 | 0.062 | 0.595 | ||
| S126 | 0.185 | 0.743 | 0.062 | 0.498 | ||
| S120 | 0.185 | 0.816 | 0.115 | 0.656 | ||
| S115 | 0.185 | 0.703 | 0.089 | 0.480 | ||
| S111 | 0.185 | 1.007 | 0.057 | 0.798 | ||
| S430 | 0.183 | 1.033 | 0.109 | 1.032 | ||
| S444 | 0.182 | 0.926 | 0.333 | 0.836 | ||
| S487 | 0.182 | 0.625 | 0.068 | 0.559 | ||
| S486 | 0.182 | 0.780 | 0.036 | 0.609 | ||
| S398 | 0.182 | 0.663 | 0.078 | 0.656 | ||
| S370 | 0.182 | 0.745 | 0.026 | 0.739 | ||
| S253 | 0.182 | 0.701 | 0.026 | 0.609 | ||
| S252 | 0.182 | 0.775 | 0.109 | 0.618 | ||
| S249 | 0.182 | 0.768 | 0.036 | 0.693 | ||
| S193 | 0.182 | 0.625 | 0.073 | 0.523 | ||
| S186 | 0.182 | 0.706 | 0.104 | 0.588 | ||
| S150 | 0.182 | 1.545 | 0.031 | 1.134 | ||
| S137 | 0.182 | 0.722 | 0.042 | 0.524 | ||
| S104 | 0.182 | 0.656 | 0.073 | 0.522 | ||
| S026 | 0.182 | 0.934 | 0.036 | 0.732 | ||
| S017 | 0.180 | 0.606 | 0.068 | 0.534 | ||
| S467 | 0.180 | 1.071 | 0.078 | 0.735 | ||
| S459 | 0.180 | 0.751 | 0.047 | 0.609 | ||
| S449 | 0.180 | 0.829 | 0.099 | 0.682 | ||
| S344 | 0.180 | 0.699 | 0.026 | 0.807 | ||
| S263 | 0.180 | 0.671 | 0.089 | 0.554 | ||
| S254 | 0.180 | 0.862 | 0.052 | 0.708 | ||
| S225 | 0.180 | 0.803 | 0.057 | 0.587 | ||
| S197 | 0.180 | 0.765 | 0.005 | 0.743 | ||
| S121 | 0.180 | 0.508 | 0.052 | 0.480 | ||
| S040 | 0.180 | 0.929 | 0.026 | 0.829 | ||
| S089 | 0.178 | 0.888 | 0.031 | 0.603 | ||
| S414 | 0.177 | 1.065 | 0.042 | 0.880 | ||
| S386 | 0.177 | 0.781 | 0.047 | 0.623 | ||
| S189 | 0.177 | 1.288 | 0.042 | 0.724 | ||
| S044 | 0.177 | 0.665 | 0.016 | 0.642 | ||
| S035 | 0.177 | 0.523 | 0.099 | 0.497 | ||
| S030 | 0.177 | 0.718 | 0.068 | 0.546 | ||
| S013 | 0.177 | 0.706 | 0.062 | 0.563 | ||
| S470 | 0.174 | 0.644 | 0.042 | 0.605 | ||
| S443 | 0.174 | 0.516 | 0.047 | 0.653 | ||
| S399 | 0.174 | 0.733 | 0.042 | 0.692 | ||
| S353 | 0.174 | 0.544 | 0.083 | 0.451 | ||
| S349 | 0.174 | 0.631 | 0.021 | 0.963 | ||
| S310 | 0.174 | 0.692 | 0.021 | 0.607 | ||
| S286 | 0.174 | 0.708 | 0.047 | 0.652 | ||
| S269 | 0.174 | 0.642 | 0.047 | 0.559 | ||
| S228 | 0.174 | 0.572 | 0.036 | 0.485 | ||
| S222 | 0.174 | 0.765 | 0.078 | 0.627 | ||
| S161 | 0.174 | 0.626 | 0.115 | 0.562 | ||
| S076 | 0.174 | 0.655 | 0.031 | 0.671 | ||
| S034 | 0.174 | 0.792 | 0.099 | 0.645 | ||
| S022 | 0.174 | 0.601 | 0.078 | 0.500 | ||
| S474 | 0.172 | 1.003 | 0.073 | 0.865 | ||
| S435 | 0.172 | 0.757 | 0.010 | 0.663 | ||
| S434 | 0.172 | 0.895 | 0.031 | 0.861 | ||
| S410 | 0.172 | 0.762 | 0.068 | 0.586 | ||
| S374 | 0.172 | 1.247 | 0.078 | 0.603 | ||
| S360 | 0.172 | 0.770 | 0.021 | 0.570 | ||
| S351 | 0.172 | 0.797 | 0.036 | 0.768 | ||
| S338 | 0.172 | 0.859 | 0.010 | 0.731 | ||
| S326 | 0.172 | 0.614 | 0.026 | 0.537 | ||
| S280 | 0.172 | 0.676 | 0.062 | 0.498 | ||
| S262 | 0.172 | 0.587 | 0.109 | 0.529 | ||
| S230 | 0.172 | 0.952 | 0.047 | 1.729 | ||
| S216 | 0.172 | 0.681 | 0.089 | 0.687 | ||
| S183 | 0.172 | 1.014 | 0.016 | 1.340 | ||
| S158 | 0.172 | 0.631 | 0.073 | 0.575 | ||
| S065 | 0.172 | 0.811 | 0.021 | 0.605 | ||
| S062 | 0.172 | 0.708 | 0.042 | 0.641 | ||
| S314 | 0.169 | 0.651 | 0.021 | 0.560 | ||
| S209 | 0.169 | 0.564 | 0.079 | 0.509 | ||
| S476 | 0.167 | 0.612 | 0.057 | 0.514 | ||
| S468 | 0.167 | 0.829 | 0.021 | 0.643 | ||
| S450 | 0.167 | 0.733 | 0.047 | 0.530 | ||
| S445 | 0.167 | 0.452 | 0.104 | 0.405 | ||
| S442 | 0.167 | 0.829 | 0.005 | 0.638 | ||
| S397 | 0.167 | 0.633 | 0.125 | 0.613 | ||
| S309 | 0.167 | 0.940 | 0.062 | 0.815 | ||
| S264 | 0.167 | 0.902 | 0.036 | 0.675 | ||
| S245 | 0.167 | 0.697 | 0.057 | 0.579 | ||
| S220 | 0.167 | 0.763 | 0.010 | 0.766 | ||
| S205 | 0.167 | 1.010 | 0.005 | 0.740 | ||
| S181 | 0.167 | 0.746 | 0.062 | 0.736 | ||
| S143 | 0.167 | 0.972 | 0.036 | 0.770 | ||
| S038 | 0.167 | 0.691 | 0.115 | 0.618 | ||
| S033 | 0.167 | 0.752 | 0.083 | 0.698 | ||
| S024 | 0.167 | 0.735 | 0.042 | 0.756 | ||
| S020 | 0.167 | 0.820 | 0.073 | 0.833 | ||
| S480 | 0.164 | 0.861 | 0.016 | 0.662 | ||
| S465 | 0.164 | 0.876 | 0.057 | 0.896 | ||
| S447 | 0.164 | 0.840 | 0.005 | 0.689 | ||
| S439 | 0.164 | 0.652 | 0.062 | 0.554 | ||
| S292 | 0.164 | 0.790 | 0.068 | 0.672 | ||
| S284 | 0.164 | 0.922 | 0.021 | 0.731 | ||
| S258 | 0.164 | 0.756 | 0.068 | 0.514 | ||
| S221 | 0.164 | 0.765 | 0.062 | 0.652 | ||
| S210 | 0.164 | 0.978 | 0.005 | 0.645 | ||
| S050 | 0.164 | 1.025 | 0.016 | 0.714 | ||
| S009 | 0.164 | 0.833 | 0.010 | 0.633 | ||
| S438 | 0.163 | 1.790 | 0.016 | 1.229 | ||
| S455 | 0.161 | 1.022 | 0.005 | 0.944 | ||
| S429 | 0.161 | 1.266 | 0.031 | 0.905 | ||
| S421 | 0.161 | 0.718 | 0.052 | 0.672 | ||
| S361 | 0.161 | 0.757 | 0.016 | 0.556 | ||
| S352 | 0.161 | 0.657 | 0.042 | 0.576 | ||
| S313 | 0.161 | 0.711 | 0.036 | 0.602 | ||
| S201 | 0.161 | 1.008 | 0.036 | 0.712 | ||
| S196 | 0.161 | 0.788 | 0.016 | 0.545 | ||
| S054 | 0.161 | 0.576 | 0.057 | 0.535 | ||
| S039 | 0.161 | 0.524 | 0.083 | 0.517 | ||
| S004 | 0.161 | 1.051 | 0.010 | 0.800 | ||
| S127 | 0.160 | 1.396 | 0.021 | 1.066 | ||
| S011 | 0.159 | 0.755 | 0.042 | 0.657 | 0.687 | 0.185 |
| S489 | 0.159 | 0.692 | 0.026 | 0.576 | ||
| S413 | 0.159 | 0.909 | 0.057 | 0.689 | ||
| S380 | 0.159 | 0.610 | 0.083 | 0.628 | ||
| S376 | 0.159 | 0.683 | 0.099 | 0.465 | ||
| S372 | 0.159 | 0.685 | 0.026 | 0.556 | ||
| S363 | 0.159 | 1.236 | 0.099 | 0.918 | ||
| S308 | 0.159 | 0.605 | 0.016 | 0.536 | ||
| S302 | 0.159 | 0.995 | 0.021 | 0.696 | ||
| S212 | 0.159 | 0.702 | 0.042 | 0.663 | ||
| S194 | 0.159 | 0.755 | 0.073 | 0.666 | ||
| S071 | 0.159 | 0.748 | 0.016 | 0.836 | ||
| S187 | 0.157 | 1.300 | 0.073 | 0.828 | ||
| S395 | 0.156 | 0.612 | 0.036 | 0.530 | ||
| S268 | 0.156 | 0.646 | 0.047 | 0.630 | ||
| S255 | 0.156 | 0.647 | 0.042 | 0.574 | ||
| S215 | 0.156 | 0.657 | 0.047 | 0.538 | ||
| S114 | 0.156 | 0.571 | 0.042 | 0.528 | ||
| S068 | 0.156 | 1.011 | 0.052 | 0.624 | ||
| S291 | 0.154 | 1.003 | 0.026 | 0.837 | ||
| S275 | 0.154 | 0.565 | 0.036 | 0.546 | ||
| S234 | 0.154 | 0.954 | 0.026 | 0.664 | ||
| S032 | 0.154 | 0.550 | 0.031 | 0.585 | ||
| S420 | 0.151 | 1.205 | 0.146 | 1.147 | ||
| S473 | 0.151 | 0.837 | 0.104 | 0.621 | ||
| S423 | 0.151 | 0.961 | 0.036 | 0.577 | ||
| S403 | 0.151 | 1.077 | 0.052 | 0.502 | ||
| S379 | 0.151 | 0.554 | 0.073 | 0.494 | ||
| S315 | 0.151 | 0.728 | 0.047 | 1.003 | ||
| S119 | 0.149 | 1.968 | 0.021 | 1.564 | ||
| S498 | 0.148 | 0.715 | 0.042 | 0.587 | ||
| S482 | 0.148 | 0.772 | 0.031 | 0.500 | ||
| S436 | 0.148 | 0.660 | 0.036 | 0.550 | ||
| S391 | 0.148 | 0.810 | 0.031 | 0.639 | ||
| S261 | 0.148 | 0.714 | 0.031 | 0.674 | ||
| S165 | 0.148 | 0.908 | 0.021 | 0.679 | ||
| S155 | 0.148 | 0.688 | 0.042 | 0.838 | ||
| S147 | 0.148 | 0.946 | 0.016 | 0.682 | ||
| S088 | 0.148 | 1.056 | 0.026 | 0.878 | ||
| S077 | 0.148 | 0.884 | 0.057 | 1.019 | ||
| S493 | 0.146 | 0.733 | 0.026 | 0.550 | ||
| S462 | 0.146 | 0.901 | 0.042 | 0.780 | ||
| S440 | 0.146 | 1.028 | 0.036 | 0.817 | ||
| S369 | 0.146 | 0.680 | 0.036 | 0.536 | ||
| S256 | 0.146 | 0.716 | 0.021 | 0.594 | ||
| S240 | 0.146 | 0.768 | 0.042 | 0.536 | ||
| S236 | 0.146 | 0.861 | 0.000 | 0.635 | ||
| S213 | 0.146 | 0.653 | 0.036 | 0.549 | ||
| S074 | 0.146 | 0.730 | 0.052 | 0.533 | ||
| S058 | 0.146 | 0.609 | 0.036 | 0.596 | ||
| S057 | 0.146 | 0.645 | 0.021 | 0.562 | ||
| S491 | 0.143 | 0.593 | 0.068 | 0.525 | ||
| S355 | 0.143 | 0.737 | 0.026 | 0.698 | ||
| S339 | 0.143 | 0.859 | 0.068 | 0.831 | ||
| S288 | 0.143 | 0.881 | 0.016 | 0.691 | ||
| S084 | 0.143 | 0.932 | 0.000 | 0.966 | ||
| S471 | 0.141 | 0.844 | 0.057 | 0.799 | ||
| S404 | 0.141 | 1.462 | 0.010 | 1.049 | ||
| S343 | 0.141 | 0.761 | 0.010 | 0.686 | ||
| S325 | 0.141 | 1.068 | 0.016 | 0.859 | ||
| S317 | 0.141 | 0.692 | 0.026 | 0.620 | ||
| S273 | 0.141 | 0.699 | 0.094 | 0.677 | ||
| S202 | 0.141 | 0.641 | 0.062 | 0.513 | ||
| S164 | 0.141 | 0.512 | 0.010 | 0.629 | ||
| S142 | 0.141 | 0.518 | 0.083 | 0.485 | ||
| S045 | 0.141 | 0.730 | 0.010 | 0.912 | ||
| S392 | 0.138 | 0.738 | 0.042 | 0.727 | ||
| S387 | 0.138 | 0.679 | 0.036 | 0.637 | ||
| S368 | 0.138 | 0.854 | 0.047 | 0.707 | ||
| S274 | 0.138 | 0.731 | 0.036 | 0.738 | ||
| S179 | 0.138 | 0.738 | 0.026 | 0.725 | ||
| S168 | 0.138 | 0.631 | 0.078 | 0.646 | ||
| S160 | 0.138 | 0.640 | 0.057 | 0.526 | ||
| S138 | 0.138 | 0.888 | 0.031 | 0.629 | ||
| S340 | 0.136 | 1.142 | 0.047 | 1.066 | ||
| S428 | 0.135 | 0.736 | 0.068 | 0.569 | ||
| S381 | 0.135 | 0.794 | 0.021 | 0.679 | ||
| S118 | 0.135 | 1.150 | 0.005 | 0.974 | ||
| S015 | 0.135 | 0.580 | 0.047 | 0.643 | ||
| S098 | 0.134 | 0.993 | 0.021 | 0.849 | ||
| S464 | 0.133 | 0.690 | 0.073 | 0.582 | ||
| S461 | 0.133 | 0.692 | 0.042 | 0.569 | ||
| S394 | 0.133 | 0.642 | 0.021 | 0.590 | ||
| S219 | 0.133 | 0.907 | 0.016 | 0.718 | ||
| S148 | 0.133 | 0.764 | 0.062 | 0.778 | ||
| S095 | 0.133 | 1.531 | 0.021 | 0.931 | ||
| S500 | 0.130 | 1.020 | 0.016 | 0.753 | ||
| S460 | 0.130 | 0.647 | 0.036 | 0.583 | ||
| S427 | 0.130 | 0.672 | 0.036 | 0.601 | ||
| S243 | 0.130 | 0.705 | 0.000 | 0.676 | ||
| S208 | 0.130 | 1.116 | 0.026 | 0.804 | ||
| S185 | 0.130 | 0.802 | 0.005 | 0.579 | ||
| S180 | 0.130 | 0.742 | 0.031 | 0.616 | ||
| S171 | 0.130 | 1.159 | 0.036 | 0.856 | ||
| S091 | 0.130 | 0.673 | 0.031 | 0.561 | ||
| S081 | 0.130 | 0.733 | 0.021 | 0.718 | ||
| S075 | 0.130 | 1.089 | 0.068 | 0.890 | ||
| S060 | 0.130 | 0.947 | 0.042 | 0.695 | ||
| S041 | 0.130 | 0.751 | 0.052 | 0.672 | ||
| S446 | 0.128 | 0.838 | 0.010 | 0.838 | ||
| S365 | 0.128 | 0.828 | 0.000 | 0.658 | ||
| S357 | 0.128 | 0.837 | 0.000 | 0.636 | ||
| S318 | 0.128 | 1.066 | 0.000 | 0.955 | ||
| S231 | 0.128 | 0.784 | 0.016 | 0.644 | ||
| S051 | 0.128 | 0.753 | 0.016 | 0.569 | ||
| S037 | 0.128 | 0.965 | 0.047 | 0.756 | ||
| S018 | 0.128 | 0.816 | 0.052 | 0.682 | ||
| S016 | 0.128 | 0.678 | 0.021 | 0.605 | ||
| S061 | 0.126 | 1.281 | 0.021 | 0.593 | ||
| S463 | 0.125 | 0.841 | 0.026 | 0.659 | ||
| S431 | 0.125 | 0.664 | 0.036 | 0.772 | ||
| S416 | 0.125 | 1.056 | 0.026 | 1.011 | ||
| S141 | 0.125 | 0.657 | 0.062 | 0.572 | ||
| S055 | 0.125 | 0.756 | 0.057 | 0.553 | ||
| S031 | 0.125 | 0.780 | 0.016 | 0.669 | ||
| S005 | 0.125 | 0.877 | 0.000 | 0.999 | ||
| S182 | 0.123 | 1.166 | 0.031 | 0.846 | ||
| S010 | 0.122 | 1.152 | 0.068 | 1.439 | 0.968 | 0.159 |
| S415 | 0.122 | 1.296 | 0.021 | 0.780 | ||
| S271 | 0.122 | 1.322 | 0.026 | 0.993 | ||
| S238 | 0.122 | 0.965 | 0.031 | 0.738 | ||
| S099 | 0.122 | 0.691 | 0.016 | 0.696 | ||
| S027 | 0.122 | 0.730 | 0.010 | 0.706 | ||
| S426 | 0.120 | 0.930 | 0.005 | 0.711 | ||
| S393 | 0.120 | 0.681 | 0.031 | 0.537 | ||
| S378 | 0.120 | 1.135 | 0.016 | 0.781 | ||
| S134 | 0.120 | 0.688 | 0.021 | 0.635 | ||
| S433 | 0.117 | 1.244 | 0.016 | 0.732 | ||
| S354 | 0.117 | 0.920 | 0.016 | 0.648 | ||
| S204 | 0.117 | 0.719 | 0.026 | 0.623 | ||
| S169 | 0.117 | 1.828 | 0.036 | 1.431 | ||
| S028 | 0.117 | 0.887 | 0.052 | 0.775 | ||
| S241 | 0.115 | 0.740 | 0.109 | 0.587 | ||
| S176 | 0.115 | 1.080 | 0.016 | 0.606 | ||
| S106 | 0.115 | 0.581 | 0.016 | 0.543 | ||
| S056 | 0.115 | 0.664 | 0.052 | 0.568 | ||
| S432 | 0.112 | 1.023 | 0.005 | 0.869 | ||
| S364 | 0.112 | 0.829 | 0.068 | 0.607 | ||
| S184 | 0.111 | 1.733 | 0.016 | 1.934 | ||
| S457 | 0.109 | 0.834 | 0.047 | 0.694 | ||
| S382 | 0.109 | 0.938 | 0.052 | 0.920 | ||
| S297 | 0.109 | 0.788 | 0.036 | 0.593 | ||
| S159 | 0.109 | 0.738 | 0.036 | 0.724 | ||
| S401 | 0.107 | 1.062 | 0.010 | 0.683 | ||
| S069 | 0.107 | 0.893 | 0.010 | 0.859 | ||
| S400 | 0.104 | 0.915 | 0.031 | 0.811 | ||
| S257 | 0.104 | 1.620 | 0.016 | 1.348 | ||
| S078 | 0.104 | 1.021 | 0.000 | 1.016 | ||
| S492 | 0.102 | 0.754 | 0.010 | 0.698 | ||
| S412 | 0.099 | 0.782 | 0.026 | 0.672 | ||
| S406 | 0.096 | 0.748 | 0.010 | 0.537 | ||
| S322 | 0.096 | 0.690 | 0.016 | 0.599 | ||
| S293 | 0.096 | 0.965 | 0.031 | 1.032 | ||
| S090 | 0.096 | 0.809 | 0.026 | 0.610 | ||
| S049 | 0.096 | 0.968 | 0.042 | 0.990 | ||
| S170 | 0.094 | 0.970 | 0.032 | 0.891 | ||
| S451 | 0.094 | 0.884 | 0.021 | 0.816 | ||
| S029 | 0.094 | 0.924 | 0.068 | 0.875 | ||
| S012 | 0.094 | 0.928 | 0.016 | 0.699 | ||
| S327 | 0.086 | 0.933 | 0.005 | 0.690 |
plot_distribution <- function(data) {
  # Per-participant RT density curves (normalized to a common height),
  # overlaid on the grand density (grey area) and faceted participant x task.
  # Curves are coloured red when the whole task was flagged as an outlier for
  # that participant, orange when only their 2nd block was flagged.

  # Densities per participant/task/block, normalized within participant.
  densities <- data |>
    estimate_density(select = "RT", at = c("Participant", "Task", "Block")) |>
    group_by(Participant) |>
    normalize(select = "y") |>
    ungroup() |>
    mutate(
      color = case_when(
        Participant %in% outliers_perceptual & Task == "Perceptual" ~ "red",
        Participant %in% outliers_illusion1 & Task == "Illusion_Session1" ~ "red",
        Participant %in% outliers_illusion2 & Task == "Illusion_Session2" ~ "red",
        Participant %in% outliers_perceptual_block2 & Task == "Perceptual" ~ "orange",
        Participant %in% outliers_illusion1_block2 & Task == "Illusion_Session1" ~ "orange",
        TRUE ~ "blue"
      ),
      Task = fct_recode(Task,
        "Illusion task (session 1)" = "Illusion_Session1",
        "Perceptual task" = "Perceptual"
      )
    )

  # Grand density across all participants, used as the grey background.
  background <- normalize(estimate_density(data, select = "RT"), select = "y")

  densities |>
    ggplot(aes(x = x, y = y)) +
    geom_area(data = background, alpha = 0.2) +
    geom_line(aes(color = color, group = interaction(Participant, Block), linetype = Block), size = 0.5) +
    scale_color_manual(values = c("red" = "#F44336", "orange" = "#FF9800", "blue" = "blue"), guide = "none") +
    scale_x_continuous(expand = c(0, 0)) +
    scale_y_continuous(expand = c(0, 0)) +
    coord_cartesian(xlim = c(0, 2)) +
    theme_modern() +
    theme(
      axis.text.y = element_blank(),
      axis.line.y = element_blank(),
      strip.text.y = element_text(size = rel(0.6))
    ) +
    facet_grid(Participant ~ Task) +
    labs(y = "", x = "Reaction Time (s)")
}
# Split participants into five batches of 100 by the numeric part of their
# ID (e.g. "S042" -> 42) so each column of the figure stays readable.
participant_number <- function(x) as.numeric(gsub("\\D", "", x))
p <- lapply(seq_len(5), function(i) {
  data |>
    filter(
      participant_number(Participant) > (i - 1) * 100,
      participant_number(Participant) <= 100 * i
    ) |>
    plot_distribution()
})
p <- patchwork::wrap_plots(p, ncol = 5, nrow = 1) + plot_layout(guides = "collect")
ggsave("figures/1_Preprocessing-outliers_RT.png", p, width = 7 * 5, height = 24, dpi = 150)
knitr::include_graphics("figures/1_Preprocessing-outliers_RT.png")
# Apply the exclusions decided above: drop flagged participants entirely,
# and drop only block 2 for participants whose second block was problematic.
illusion1 <- illusion1 |>
  filter(!Participant %in% outliers_illusion1) |>
  filter(!(Participant %in% outliers_illusion1_block2 & Block == 2))
illusion2 <- illusion2 |>
  filter(!Participant %in% outliers_illusion2)
perceptual <- perceptual |>
  filter(!Participant %in% outliers_perceptual) |>
  filter(!(Participant %in% outliers_perceptual_block2 & Block == 2))
For each block, we computed the error rate and, if more than 50%, we discarded the whole block (as it likely indicates that instructions got mixed up, for instance participants were selecting the smaller instead of the bigger circle).
# Error rate computed separately for every block of every task and illusion
# type, sorted so the most error-prone blocks come first.
data <- rbind(illusion1, illusion2, perceptual)
data <- data |>
  group_by(Participant, Task, Illusion_Type, Block) |>
  summarize(ErrorRate_per_block = sum(Error) / n()) |>
  ungroup() |>
  arrange(desc(ErrorRate_per_block))
# Distribution of per-block error rates; the dashed vertical line marks the
# 50% threshold beyond which a block is considered invalid.
block_density <- estimate_density(data, at = c("Task", "Illusion_Type", "Block"), method = "KernSmooth")
block_density |>
  ggplot(aes(x = x, y = y)) +
  geom_line(aes(color = Illusion_Type, linetype = Block)) +
  geom_vline(xintercept = 0.5, linetype = "dashed") +
  scale_x_continuous(expand = c(0, 0)) +
  scale_y_continuous(expand = c(0, 0)) +
  scale_color_manual(values = c("Ebbinghaus" = "#2196F3", "MullerLyer" = "#4CAF50", "VerticalHorizontal" = "#FF5722")) +
  labs(y = "Distribution", x = "Error Rate") +
  theme_modern() +
  facet_wrap(~Task)
remove_badblocks <- function(df) {
  # Drop every block whose error rate reaches 50% (random-like responding,
  # e.g. instructions applied in reverse).
  #
  # df: trial-level data with Participant, Illusion_Type, Block and Error.
  # Returns a list with the cleaned data ($data) and a ready-made sentence
  # reporting how many trials were removed ($text).
  n_before <- nrow(df)
  cleaned <- df |>
    group_by(Participant, Illusion_Type, Block) |>
    mutate(ErrorRate_per_block = sum(Error) / n()) |>
    ungroup() |>
    filter(ErrorRate_per_block < 0.5) |>
    select(-ErrorRate_per_block)
  n_removed <- n_before - nrow(cleaned)
  msg <- paste0(
    "We removed ", n_removed,
    " (", insight::format_value(n_removed / n_before, as_percent = TRUE),
    ") trials belonging to bad blocks."
  )
  list(data = cleaned, text = msg)
}
# Remove bad blocks from each dataset in turn, printing the removal summary
# (the rendered console output is shown below each call).
out <- remove_badblocks(illusion1)
print(paste("Illusion (session 1):", out$text))
[1] “Illusion (session 1): We removed 704 (0.37%) trials belonging to bad blocks.”
illusion1 <- out$data
out <- remove_badblocks(illusion2)
print(paste("Illusion (session 2):", out$text))
[1] “Illusion (session 2): We removed 64 (4.17%) trials belonging to bad blocks.”
illusion2 <- out$data
out <- remove_badblocks(perceptual)
print(paste("Perceptual task:", out$text))
[1] “Perceptual task: We removed 544 (0.57%) trials belonging to bad blocks.”
perceptual <- out$data
check_trials <- function(df) {
  # Flag and remove outlier trials: RT >= 10 s, RT < 125 ms, or RT more than
  # 4 robust SD (median + 4 MAD) above the participant's own distribution.
  #
  # df: trial-level data with at least Participant, RT and Error columns.
  # Returns a list with diagnostic plots ($p, stacked via patchwork), the
  # cleaned data ($data) and a text summary of the removal ($text).
  data <- df |>
    mutate(Outlier = RT >= 10) |>
    group_by(Participant) |>
    # Per-participant criterion: implausibly fast (< 125 ms) or robust z > 4.
    mutate(Outlier = ifelse(RT < 0.125 | standardize(RT, robust = TRUE) > 4, TRUE, Outlier)) |>
    ungroup()

  # Per-participant RT densities; a curve switches linetype past the
  # participant's median + 4 MAD threshold (computed on correct trials only).
  p1 <- data |>
    filter(RT < 10) |>
    estimate_density(select = "RT", at = "Participant") |>
    group_by(Participant) |>
    normalize(select = "y") |>
    ungroup() |>
    merge(data |>
      group_by(Participant) |>
      mutate(Threshold = median(RT) + 4 * mad(RT)) |>
      filter(Error == 0) |>
      summarize(Threshold = mean(Threshold))) |>
    mutate(Outlier = x >= Threshold) |>
    ggplot(aes(x = x, y = y)) +
    geom_area(data = normalize(estimate_density(filter(data, RT < 10), select = "RT"), select = "y"), alpha = 0.2) +
    geom_line(aes(color = Participant, linetype = Outlier), alpha = 0.2) +
    # FIX: the lower cutoff is 0.125 s (125 ms). The original xintercept of
    # 125 fell far outside the visible 0-5 s x-range (axis is in seconds).
    geom_vline(xintercept = c(0.125), linetype = "dashed", color = "red") +
    scale_color_material_d("rainbow", guide = "none") +
    scale_x_continuous(expand = c(0, 0)) +
    scale_y_continuous(expand = c(0, 0)) +
    guides(linetype = "none") +
    coord_cartesian(xlim = c(0, 5)) +
    theme_modern() +
    theme(axis.text.y = element_blank()) +
    labs(y = "", x = "Reaction Time (s)")

  # Share of outlier trials per participant, relative to the total number of
  # trials in the dataset currently being processed.
  p2 <- data |>
    group_by(Participant) |>
    # FIX: divide by nrow(data), not nrow(illusion1) — the original captured
    # the global `illusion1` object, producing wrong percentages whenever
    # this function was called on illusion2 or perceptual.
    summarize(Outlier = sum(Outlier) / nrow(data)) |>
    mutate(Participant = fct_reorder(Participant, Outlier)) |>
    ggplot(aes(x = Participant, y = Outlier)) +
    geom_bar(stat = "identity", aes(fill = Participant)) +
    scale_fill_material_d("rainbow", guide = "none") +
    scale_x_discrete(expand = c(0, 0)) +
    scale_y_continuous(expand = c(0, 0), labels = scales::percent) +
    see::theme_modern() +
    theme(axis.text.x = element_blank()) +
    labs(y = "Percentage of outlier trials")

  text <- paste0(
    "We removed ",
    sum(data$Outlier),
    " (",
    insight::format_value(sum(data$Outlier) / nrow(data), as_percent = TRUE),
    ") outlier trials (125 ms < RT < 4 MAD above median)."
  )

  # Drop the flagged trials and the helper column before returning.
  data <- filter(data, Outlier == FALSE)
  data$Outlier <- NULL
  list(p = p1 / p2, data = data, text = text)
}
# Flag and drop RT-outlier trials in each dataset, printing the removal
# summary and displaying the diagnostic plots after each call
# (the rendered console output is shown below each print).
out <- check_trials(illusion1)
print(paste("Illusion (session 1):", out$text))
[1] “Illusion (session 1): We removed 6735 (3.55%) outlier trials (125 ms < RT < 4 MAD above median).”
out$p
illusion1 <- out$data
out <- check_trials(illusion2)
print(paste("Illusion (session 2):", out$text))
[1] “Illusion (session 2): We removed 49 (3.33%) outlier trials (125 ms < RT < 4 MAD above median).”
out$p
illusion2 <- out$data
out <- check_trials(perceptual)
print(paste("Perceptual task:", out$text))
[1] “Perceptual task: We removed 4664 (4.92%) outlier trials (125 ms < RT < 4 MAD above median).”
out$p